Fix macOS 15.4+ build issues with strchrnul #9
base: main
Conversation
Add build environment configuration to handle strchrnul availability on macOS 15.4+. This ensures proper compilation on newer macOS versions by setting the appropriate deployment target and CGO flags when needed.
Update the ldflags version variable path from internal/cmd.version to internal/info.Version to match the current codebase structure.
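For reference, a minimal sketch of how such an -X override is wired up. The package layout mirrors the internal/info.Version path mentioned above; the module path and default value are placeholders, not the repository's real values.

// internal/info/info.go — sketch of the variable that the release ldflags now target.
package info

// Version is overridden at build time, e.g.:
//
//	go build -ldflags "-X example.com/sqlc/internal/info.Version=v1.99.0" ./...
//
// After the move from internal/cmd.version, the -X flag must name this path;
// pointing it at the old path means the override never reaches the variable
// and released binaries keep the default version string.
var Version = "(devel)"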
- Change wpgx import from github.com/stumble/wpgx to github.com/one2x-ai/wpgx
- Update driver.go SQLDriverWPGX constant
- Update imports.go wpgx package references in db and batch imports
- Required for organization-specific wpgx fork with SSL support
Stumble left a comment
Thanks for the PR. Some minor but critical changes are required.
	SQLDriverLibPQ = "github.com/lib/pq"
	SQLDriverGoSQLDriverMySQL = "github.com/go-sql-driver/mysql"
-	SQLDriverWPGX = "github.com/stumble/wpgx"
+	SQLDriverWPGX = "github.com/one2x-ai/wpgx"
For this PR, we need to revert this back to github.com/stumble/wpgx
		pkg = append(pkg, ImportSpec{Path: "github.com/one2x-ai/wpgx"})
		pkg = append(pkg, ImportSpec{Path: "github.com/stumble/dcache"})
	default:
		std = append(std, ImportSpec{Path: "database/sql"})
		if i.Settings.Go.EmitPreparedQueries {
			std = append(std, ImportSpec{Path: "fmt"})
		}
	}

	sort.Slice(std, func(i, j int) bool { return std[i].Path < std[j].Path })
	sort.Slice(pkg, func(i, j int) bool { return pkg[i].Path < pkg[j].Path })
	return fileImports{Std: std, Dep: pkg}
}

var stdlibTypes = map[string]string{
	"json.RawMessage": "encoding/json",
	"time.Time": "time",
	"net.IP": "net",
	"net.HardwareAddr": "net",
	"netip.Addr": "net/netip",
	"netip.Prefix": "net/netip",
}

var pqtypeTypes = map[string]struct{}{
	"pqtype.CIDR": {},
	"pqtype.Inet": {},
	"pqtype.Macaddr": {},
	"pqtype.NullRawMessage": {},
}

func buildImports(settings *plugin.Settings, queries []Query, uses func(string) bool) (map[string]struct{}, map[ImportSpec]struct{}) {
	pkg := make(map[ImportSpec]struct{})
	std := make(map[string]struct{})

	if uses("sql.Null") {
		std["database/sql"] = struct{}{}
	}

	sqlpkg := parseDriver(settings.Go.SqlPackage)
	for _, q := range queries {
		if q.Cmd == metadata.CmdExecResult {
			switch sqlpkg {
			case SQLDriverPGXV4:
				pkg[ImportSpec{Path: "github.com/jackc/pgconn"}] = struct{}{}
			case SQLDriverPGXV5:
				pkg[ImportSpec{Path: "github.com/jackc/pgx/v5/pgconn"}] = struct{}{}
			case SQLDriverWPGX:
				pkg[ImportSpec{Path: "github.com/jackc/pgx/v5/pgconn"}] = struct{}{}
			default:
				std["database/sql"] = struct{}{}
			}
		}

		if q.Cmd == metadata.CmdOne && sqlpkg == SQLDriverWPGX {
			pkg[ImportSpec{Path: "github.com/jackc/pgx/v5"}] = struct{}{}
		}
	}

	for typeName, pkg := range stdlibTypes {
		if uses(typeName) {
			std[pkg] = struct{}{}
		}
	}

	if uses("pgtype.") {
		if sqlpkg == SQLDriverPGXV5 || sqlpkg == SQLDriverWPGX {
			pkg[ImportSpec{Path: "github.com/jackc/pgx/v5/pgtype"}] = struct{}{}
		} else {
			pkg[ImportSpec{Path: "github.com/jackc/pgtype"}] = struct{}{}
		}
	}

	for typeName := range pqtypeTypes {
		if uses(typeName) {
			pkg[ImportSpec{Path: "github.com/sqlc-dev/pqtype"}] = struct{}{}
			break
		}
	}

	overrideTypes := map[string]string{}
	for _, o := range settings.Overrides {
		if o.GoType.BasicType || o.GoType.TypeName == "" {
			continue
		}
		overrideTypes[o.GoType.TypeName] = o.GoType.ImportPath
	}

	_, overrideNullTime := overrideTypes["pq.NullTime"]
	if uses("pq.NullTime") && !overrideNullTime {
		pkg[ImportSpec{Path: "github.com/lib/pq"}] = struct{}{}
	}
	_, overrideUUID := overrideTypes["uuid.UUID"]
	if uses("uuid.UUID") && !overrideUUID {
		pkg[ImportSpec{Path: "github.com/google/uuid"}] = struct{}{}
	}
	_, overrideNullUUID := overrideTypes["uuid.NullUUID"]
	if uses("uuid.NullUUID") && !overrideNullUUID {
		pkg[ImportSpec{Path: "github.com/google/uuid"}] = struct{}{}
	}

	// Custom imports
	for _, o := range settings.Overrides {
		if o.GoType.BasicType || o.GoType.TypeName == "" {
			continue
		}
		_, alreadyImported := std[o.GoType.ImportPath]
		hasPackageAlias := o.GoType.Package != ""
		if (!alreadyImported || hasPackageAlias) && uses(o.GoType.TypeName) {
			pkg[ImportSpec{Path: o.GoType.ImportPath, ID: o.GoType.Package}] = struct{}{}
		}
	}

	return std, pkg
}

func (i *importer) interfaceImports() fileImports {
	std, pkg := buildImports(i.Settings, i.Queries, func(name string) bool {
		for _, q := range i.Queries {
			if q.hasRetType() {
				if usesBatch([]Query{q}) {
					continue
				}
				if hasPrefixIgnoringSliceAndPointerPrefix(q.Ret.Type(), name) {
					return true
				}
			}
			for _, f := range q.Arg.Pairs() {
				if hasPrefixIgnoringSliceAndPointerPrefix(f.Type, name) {
					return true
				}
			}
		}
		return false
	})

	std["context"] = struct{}{}

	return sortedImports(std, pkg)
}

func (i *importer) modelImports() fileImports {
	std, pkg := buildImports(i.Settings, nil, i.usesType)

	if len(i.Enums) > 0 {
		std["fmt"] = struct{}{}
		std["database/sql/driver"] = struct{}{}
	}

	return sortedImports(std, pkg)
}

func sortedImports(std map[string]struct{}, pkg map[ImportSpec]struct{}) fileImports {
	pkgs := make([]ImportSpec, 0, len(pkg))
	for spec := range pkg {
		pkgs = append(pkgs, spec)
	}
	stds := make([]ImportSpec, 0, len(std))
	for path := range std {
		stds = append(stds, ImportSpec{Path: path})
	}
	sort.Slice(stds, func(i, j int) bool { return stds[i].Path < stds[j].Path })
	sort.Slice(pkgs, func(i, j int) bool { return pkgs[i].Path < pkgs[j].Path })
	return fileImports{stds, pkgs}
}

func (i *importer) queryImports(filename string) fileImports {
	var gq []Query
	anyNonCopyFrom := false
	for _, query := range i.Queries {
		if usesBatch([]Query{query}) {
			continue
		}
		if query.SourceName == filename {
			gq = append(gq, query)
			if query.Cmd != metadata.CmdCopyFrom {
				anyNonCopyFrom = true
			}
		}
	}

	std, pkg := buildImports(i.Settings, gq, func(name string) bool {
		for _, q := range gq {
			if q.hasRetType() {
				if q.Ret.EmitStruct() {
					for _, f := range q.Ret.Struct.Fields {
						if hasPrefixIgnoringSliceAndPointerPrefix(f.Type, name) {
							return true
						}
					}
				}
				if hasPrefixIgnoringSliceAndPointerPrefix(q.Ret.Type(), name) {
					return true
				}
			}
			// Check the fields of the argument struct if it's emitted
			if q.Arg.EmitStruct() {
				for _, f := range q.Arg.Struct.Fields {
					if hasPrefixIgnoringSliceAndPointerPrefix(f.Type, name) {
						return true
					}
				}
			}
			// Check the argument pairs inside the method definition
			for _, f := range q.Arg.Pairs() {
				if hasPrefixIgnoringSliceAndPointerPrefix(f.Type, name) {
					return true
				}
			}
		}
		return false
	})

	sliceScan := func() bool {
		for _, q := range gq {
			if q.hasRetType() {
				if q.Ret.IsStruct() {
					for _, f := range q.Ret.Struct.Fields {
						if strings.HasPrefix(f.Type, "[]") && f.Type != "[]byte" {
							return true
						}
						for _, embed := range f.EmbedFields {
							if strings.HasPrefix(embed.Type, "[]") && embed.Type != "[]byte" {
								return true
							}
						}
					}
				} else {
					if strings.HasPrefix(q.Ret.Type(), "[]") && q.Ret.Type() != "[]byte" {
						return true
					}
				}
			}
			if !q.Arg.isEmpty() {
				if q.Arg.IsStruct() {
					for _, f := range q.Arg.Struct.Fields {
						if strings.HasPrefix(f.Type, "[]") && f.Type != "[]byte" && !f.HasSqlcSlice() {
							return true
						}
					}
				} else {
					if strings.HasPrefix(q.Arg.Type(), "[]") && q.Arg.Type() != "[]byte" && !q.Arg.HasSqlcSlices() {
						return true
					}
				}
			}
		}
		return false
	}

	// Search for sqlc.slice() calls
	sqlcSliceScan := func() bool {
		for _, q := range gq {
			if q.Arg.HasSqlcSlices() {
				return true
			}
		}
		return false
	}

	if anyNonCopyFrom {
		std["context"] = struct{}{}
		std["time"] = struct{}{}
		std["fmt"] = struct{}{}
		std["encoding/json"] = struct{}{}
		std["crypto/sha256"] = struct{}{}
		std["encoding/hex"] = struct{}{}
		std["sync"] = struct{}{}
	}

	sqlpkg := parseDriver(i.Settings.Go.SqlPackage)
	if sqlcSliceScan() {
		std["strings"] = struct{}{}
	}
	if sliceScan() && !sqlpkg.IsPGX() {
		pkg[ImportSpec{Path: "github.com/lib/pq"}] = struct{}{}
	}

	if sqlpkg == SQLDriverWPGX {
		pkg[ImportSpec{Path: "github.com/rs/zerolog/log"}] = struct{}{}
	}

	return sortedImports(std, pkg)
}

func (i *importer) copyfromImports() fileImports {
	copyFromQueries := make([]Query, 0, len(i.Queries))
	for _, q := range i.Queries {
		if q.Cmd == metadata.CmdCopyFrom {
			copyFromQueries = append(copyFromQueries, q)
		}
	}
	std, pkg := buildImports(i.Settings, copyFromQueries, func(name string) bool {
		for _, q := range copyFromQueries {
			if q.hasRetType() {
				if strings.HasPrefix(q.Ret.Type(), name) {
					return true
				}
			}
			if !q.Arg.isEmpty() {
				if strings.HasPrefix(q.Arg.Type(), name) {
					return true
				}
			}
		}
		return false
	})

	std["context"] = struct{}{}
	std["time"] = struct{}{}
	if i.Settings.Go.SqlDriver == SQLDriverGoSQLDriverMySQL {
		std["io"] = struct{}{}
		std["fmt"] = struct{}{}
		std["sync/atomic"] = struct{}{}
		pkg[ImportSpec{Path: "github.com/go-sql-driver/mysql"}] = struct{}{}
		pkg[ImportSpec{Path: "github.com/hexon/mysqltsv"}] = struct{}{}
	}

	return sortedImports(std, pkg)
}

func (i *importer) batchImports() fileImports {
	batchQueries := make([]Query, 0, len(i.Queries))
	for _, q := range i.Queries {
		if usesBatch([]Query{q}) {
			batchQueries = append(batchQueries, q)
		}
	}
	std, pkg := buildImports(i.Settings, batchQueries, func(name string) bool {
		for _, q := range batchQueries {
			if q.hasRetType() {
				if q.Ret.EmitStruct() {
					for _, f := range q.Ret.Struct.Fields {
						if hasPrefixIgnoringSliceAndPointerPrefix(f.Type, name) {
							return true
						}
					}
				}
				if hasPrefixIgnoringSliceAndPointerPrefix(q.Ret.Type(), name) {
					return true
				}
			}
			if q.Arg.EmitStruct() {
				for _, f := range q.Arg.Struct.Fields {
					if hasPrefixIgnoringSliceAndPointerPrefix(f.Type, name) {
						return true
					}
				}
			}
			for _, f := range q.Arg.Pairs() {
				if hasPrefixIgnoringSliceAndPointerPrefix(f.Type, name) {
					return true
				}
			}
		}
		return false
	})

	std["context"] = struct{}{}
	std["errors"] = struct{}{}
	sqlpkg := parseDriver(i.Settings.Go.SqlPackage)
	switch sqlpkg {
	case SQLDriverPGXV4:
		pkg[ImportSpec{Path: "github.com/jackc/pgx/v4"}] = struct{}{}
	case SQLDriverPGXV5:
		pkg[ImportSpec{Path: "github.com/jackc/pgx/v5"}] = struct{}{}
	case SQLDriverWPGX:
-		pkg[ImportSpec{Path: "github.com/stumble/wpgx"}] = struct{}{}
+		pkg[ImportSpec{Path: "github.com/one2x-ai/wpgx"}] = struct{}{}
	}
same here
Summary
This PR fixes build failures on macOS 15.4+ caused by the strchrnul function availability issue in PostgreSQL dependencies.
Problem
When building sqlc on macOS 15.4+, the build fails with a strchrnul-related compile error. This occurs because macOS 15.4+ now provides strchrnul as a system function, which conflicts with the definition bundled in the PostgreSQL dependencies.
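To make the mechanism concrete, here is a minimal cgo sketch of the guard pattern involved. The guard, helper names, and strings below are illustrative, not taken from sqlc or its dependencies; in the real vendored C code the fallback reuses the strchrnul name itself, which is exactly what collides with the new macOS declaration.

// Minimal cgo sketch of a HAVE_STRCHRNUL-style guard: without the flag the
// local fallback is compiled; with CGO_CFLAGS="-DHAVE_STRCHRNUL" the system
// strchrnul is used instead and the fallback is skipped.
package main

/*
#include <string.h>

#ifndef HAVE_STRCHRNUL
// Fallback: like strchr, but returns a pointer to the trailing NUL when c
// is not found. On macOS 15.4+, where libc declares strchrnul itself,
// redefining the real name would clash; -DHAVE_STRCHRNUL skips this block.
static char *compat_strchrnul(const char *s, int c) {
	while (*s != '\0' && *s != (char)c) {
		s++;
	}
	return (char *)s;
}
#define compat_find compat_strchrnul
#else
#define compat_find strchrnul
#endif

static const char *after_comma(void) {
	return compat_find("host,port", ',');
}
*/
import "C"

import "fmt"

func main() {
	// Prints ",port" whichever branch of the guard was compiled.
	fmt.Println(C.GoString(C.after_comma()))
}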
Solution
Added conditional build flags in the Makefile that:
- detect the macOS version and, on 15.4+, set MACOSX_DEPLOYMENT_TARGET=15.4 and CGO_CFLAGS="-DHAVE_STRCHRNUL"
- tell the C toolchain that strchrnul is available, preventing the duplicate definition

Also fixed a related issue in the release script where the version variable path was incorrect.
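A rough sketch of the equivalent conditional, written as a Go helper purely for illustration: the actual change lives in the Makefile, and the helper names and the ./... build target are assumptions.

// Illustration of the build-flag logic: detect the macOS version and add the
// deployment target and CGO flags to the environment before invoking go build.
package main

import (
	"fmt"
	"os"
	"os/exec"
	"runtime"
	"strings"
)

// buildEnv returns the environment for `go build`, adding the macOS 15.4+
// flags described in this PR when they are needed.
func buildEnv() []string {
	env := os.Environ()
	if runtime.GOOS != "darwin" {
		return env
	}
	out, err := exec.Command("sw_vers", "-productVersion").Output()
	if err != nil {
		return env
	}
	if versionAtLeast(strings.TrimSpace(string(out)), 15, 4) {
		// macOS 15.4+ ships strchrnul, so tell the C toolchain it exists
		// and pin the deployment target accordingly.
		env = append(env,
			"MACOSX_DEPLOYMENT_TARGET=15.4",
			"CGO_CFLAGS=-DHAVE_STRCHRNUL",
		)
	}
	return env
}

// versionAtLeast reports whether a "major.minor[.patch]" string is at least
// the given major/minor pair.
func versionAtLeast(v string, major, minor int) bool {
	var gotMajor, gotMinor int
	fmt.Sscanf(v, "%d.%d", &gotMajor, &gotMinor)
	return gotMajor > major || (gotMajor == major && gotMinor >= minor)
}

func main() {
	cmd := exec.Command("go", "build", "./...")
	cmd.Env = buildEnv()
	cmd.Stdout, cmd.Stderr = os.Stdout, os.Stderr
	if err := cmd.Run(); err != nil {
		os.Exit(1)
	}
}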
Changes
- Makefile: add macOS version detection and conditional build flags
- scripts/release.go: update the ldflags version variable path from internal/cmd.version to internal/info.Version
Testing
Build Error Before Fix